1 /*
2 Copyright: Marcelo S. N. Mancini (Hipreme|MrcSnm), 2018 - 2021
3 License:   [https://creativecommons.org/licenses/by/4.0/|CC BY-4.0 License].
4 Authors: Marcelo S. N. Mancini
5 
6 	Copyright Marcelo S. N. Mancini 2018 - 2021.
7 Distributed under the CC BY-4.0 License.
8    (See accompanying file LICENSE.txt or copy at
	https://creativecommons.org/licenses/by/4.0/)
10 */
11 module hip.data.assetpacker;
12 
13 import hip.util.file;
14 
15 enum HapHeaderStart = "1HZ00ZH9";
16 enum HapHeaderEnd   = "9HZ00ZH1";
17 enum HapHeaderSize = HapHeaderEnd.length + HapHeaderStart.length;
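
/*
*   On-disk layout of a .hap file, as produced by writeAssetPack below:
*
*   [ raw bytes of file 0 | raw bytes of file 1 | ... ]
*   [ HapHeaderEnd ]
*   [ one "path, startOffset\n" entry per packed file, in pack order ]
*   [ HapHeaderStart ]
*
*   getHeaderStart scans the buffer backwards for the markers and returns the offset of
*   the first entry line (0 means the buffer is not a valid .hap file).
*/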
18 
19 enum HapHeaderStatus
20 {
21     SUCCESS = 0, 
22     DOES_NOT_EXIST,
23     NOT_HAP
24 }
25 
26 
27 struct HapChunk
28 {
29     string fileName;
30     size_t startPosition;
31     ubyte[] bin;
32 
33     alias bin this;
34 }
35 
private extern(C) int sortChunk(const(void*) a, const(void*) b)
{
    const size_t startPosA = (cast(const(HapChunk)*)a).startPosition;
    const size_t startPosB = (cast(const(HapChunk)*)b).startPosition;
    //Compare instead of subtracting so large offsets can't overflow the int result.
    if(startPosA < startPosB) return -1;
    if(startPosA > startPosB) return  1;
    return 0;
}
42 
43 
44 version(HipDStdFile):
45 
47 
48 class HapFile
49 {
50     HapChunk[string] chunks;
51     immutable string path;
52     uint fileSteps;
53     protected FileProgression fp;
54 
55     /**
56     *   Reads the entire hapfile and get its chunks synchronously
57     */
58     static HapFile get(string filePath)
59     {
60         HapFile f = new HapFile(filePath, 1);
61         f.update();
62         return f;
63     }
64 
65 
66     this(string filePath, uint fileSteps = 10)
67     {
68         this.path = filePath;
69         this.fileSteps = fileSteps;
70     }
71 
72     bool loadFromMemory(in ubyte[] data)
73     {
74         HapChunk[] ch = getHapChunks(data, getHeaderStart(data));
75         foreach(c;ch)
76             chunks[c.fileName] = c;
77         return ch.length != 0;
78     }
79 
80     string getText(string chunkName, bool removeCarriageReturn = true)
81     {
82         import hip.util.string : replaceAll;
83         HapChunk* ch = (chunkName in chunks);
84         if(ch is null)
85             return "";
86         if(!removeCarriageReturn)
87             return cast(string)ch.bin;
88         return replaceAll(cast(string)ch.bin, '\r');
89     }
90 
91     string[] getChunksList()
92     {
93         string[] ret;
94         foreach(k, v; chunks)
95             ret~=k;
96         return ret;
97     }
    ///These only do something once `fp` has been set up by the loading path.
    bool update(){return fp !is null && fp.update();}
    float getProgress(){return fp is null ? 0 : fp.getProgress();}
100 
101 
102     alias chunks this;
103 }
104 
105 private string reverse(string s)
106 {
107     string ret = "";
108     foreach_reverse(c; s)
109         ret~= c;
110     return ret;
111 }
112 
113 /**
114 *
115 *   Writes an asset pack in the .hap format (Hipreme Asset Pack),
116 *   it is only sequential binary chunk containing its headers on
117 *   the file end.
118 *
119 *   Returns if the file writing was a success.
120 */
121 bool writeAssetPack(string outputFileName, string[] assetPaths, string basePath = "")
122 {
123     import hip.console.log;
124     import hip.util.conv:to;
125     import hip.util.path : relativePath;
126     import core.stdc.string : memcpy;
127     import std.file;
128     import std.stdio : File;
129     if(exists(outputFileName~".hap"))
130     {
131         rawlog(outputFileName~".hap already exists");
132         return false;
133     }
134     ubyte[] plainData;
135     size_t dataLength = 0;
136 
137     string toAppend = HapHeaderEnd;
138 
139     foreach(p; assetPaths)
140     {
141         string path = p;
142         if(basePath != "")
143             path = relativePath(path, basePath);
144 
145         if(exists(path))
146         {
147             auto _f = File(path);
148             void[] fileData = new void[](_f.size);
149             _f.rawRead(fileData);
150             dataLength+= fileData.length;
151             plainData.length = dataLength;
152             toAppend~= path~", "~to!string(dataLength-fileData.length)~"\n";
153             memcpy((plainData.ptr+dataLength-fileData.length), fileData.ptr, fileData.length);
154         }
155         else
156             rawlog("Archive at path '"~path~"' does not exists, it will be skipped");
157     }
158     toAppend~= HapHeaderStart;
159 
160     plainData.length+= toAppend.length;
161     memcpy(plainData.ptr+dataLength, toAppend.ptr, toAppend.length);
    std.file.write(outputFileName~".hap", plainData);

    return true;
165 }
166 
167 /**
168 *   Appends the file to the asset pack. It does not check if the file is already present.
169 *   
170 *   Returns the operation status, 0 = success
171 */
172 HapHeaderStatus appendAssetInPack(string hapFile, string[] assetPaths, string basePath = "")
173 {
174     import hip.console.log;
175     import hip.util.conv:to;
176     import hip.util.path : relativePath;
177     import core.stdc.string : memcpy;
178     import std.file : exists, read;
179     import std.stdio : File;
180 
181     if(!exists(hapFile))
182         return HapHeaderStatus.DOES_NOT_EXIST;
183 
184     File f = File(hapFile, "r+");
185     ubyte[] rawData = new ubyte[f.size];
186     f.rawRead(rawData);
187 
188     size_t headerStart = getHeaderStart(rawData);
189     if(headerStart == 0)
190         return HapHeaderStatus.NOT_HAP;
191 
192     string files = "";
193     for(size_t i = headerStart; i < rawData.length - HapHeaderEnd.length; i++)
194         files~= rawData[i];
195 
196     headerStart-= HapHeaderStart.length;
197     f.seek(headerStart);
198 
199     ubyte[] dataToAppend;
200 
201     string toAppend = "";
202     foreach(p; assetPaths)
203     {
204         string finalPath = p;
205         if(basePath != "")
206             finalPath = relativePath(finalPath, basePath);
207         if(exists(finalPath))
208         {
209             ubyte[] data = cast(ubyte[])read(finalPath);
210             dataToAppend.length+= data.length;
211             memcpy(dataToAppend.ptr+(dataToAppend.length - data.length), data.ptr, data.length);
212             toAppend~= finalPath~", "~to!string(headerStart)~"\n";
213             headerStart+= data.length;
214         }
215         else
216             rawlog("File named '"~finalPath~"' does not exists, it will not be appended");
217     }
218 
219     f.rawWrite(dataToAppend);
220     f.rawWrite(HapHeaderEnd);
221     f.rawWrite(files);
222     f.rawWrite(toAppend);
223     f.rawWrite(HapHeaderStart);
224     f.close();
225     
226     return HapHeaderStatus.SUCCESS;
227     
228 }
229 
230 /**
231 *       Updates files in the assetpack, mantains the order and won't overwrite every single data,
232 *   unless the data to be updated is at the top. 
233 *   
234 *       Mantaining an intelligent system that will let the less changing files at the 
235 *   top is the way to go.
236 */
237 HapHeaderStatus updateAssetInPack(string hapFile, string[] assetPaths, string basePath = "")
238 {
239     import hip.console.log;
240     import hip.util.conv:to;
241     import hip.util.array : indexOf;
242     import hip.util.path : relativePath;
243     import std.file : exists, read;
244     import std.stdio : File;
245 
246     if(!exists(hapFile))
247         return HapHeaderStatus.DOES_NOT_EXIST;
248     File target = File(hapFile, "r+");
249     ubyte[] hapData = new ubyte[target.size];
250     target.rawRead(hapData);
251     
252     const size_t headerStart = getHeaderStart(hapData);
253     if(headerStart == 0)
254         return HapHeaderStatus.NOT_HAP;
255 
256     string[] toAppend;
257     HapChunk[] chunks = getHapChunks(hapData, headerStart);
258 
259 
260     string[] fileNames;
261     foreach(a; chunks) 
262         fileNames~= a.fileName;
263 
264     size_t lowestStartPosition = size_t.max;
265 
266     foreach(p; assetPaths)
267     {
268         string path = p;
269         if(basePath != "")  
270             path = relativePath(path, basePath);
271         if(!exists(path))
272         {
273             rawlog("File '"~path~"' does not exists");
274             continue;
275         }
276         long pathIndex = indexOf(fileNames, path);
277         if(pathIndex != -1)
278         {
279             HapChunk* f = &chunks[cast(size_t)pathIndex];
280             if(f.startPosition < lowestStartPosition)
281                 lowestStartPosition = f.startPosition;
282             ubyte[] fileData = cast(ubyte[])read(path);
283             f.bin = fileData;
284         }
285         else
286             toAppend~= path;
287     }
288 
289     import core.stdc.stdlib:qsort;
290     qsort(cast(void*)chunks.ptr, chunks.length, chunks[0].sizeof, &sortChunk);
291 
    //If no existing chunk matched, there is nothing to rewrite in place; just append.
    if(lowestStartPosition == size_t.max)
    {
        target.close();
        return toAppend.length != 0 ?
            appendAssetInPack(hapFile, toAppend, basePath) : HapHeaderStatus.SUCCESS;
    }

    target.seek(lowestStartPosition);

    size_t nextStartPosition = lowestStartPosition;
295     for(int i = 0; i < chunks.length; i++)
296     {
297         if(chunks[i].startPosition >= lowestStartPosition)
298         {
299             //rawlog("Updating "~chunks[i].fileName);
300             target.rawWrite(chunks[i].bin);
301             chunks[i].startPosition = nextStartPosition;
302             nextStartPosition+= chunks[i].bin.length;
303         }
304     }
305 
306     fileTruncate(target, nextStartPosition);
307     target.rawWrite(HapHeaderEnd);
308     foreach(_f; chunks)
309         target.rawWrite(_f.fileName~", "~to!string(_f.startPosition)~"\n");
310     target.rawWrite(HapHeaderStart);
311     target.close();
312 
313     if(toAppend.length != 0)
314         return appendAssetInPack(hapFile, toAppend,  basePath);
315     return HapHeaderStatus.SUCCESS;
316 }
317 
size_t getHeaderStart(string hapFile)
{
    import std.file : exists, read;
    if(exists(hapFile))
    {
        ubyte[] hapData = cast(ubyte[])read(hapFile);
        return getHeaderStart(hapData);
    }
    return 0;
}
size_t getHeaderStart(in ubyte[] fileData)
{
    //Too small to contain both markers.
    if(fileData.length < HapHeaderSize)
        return 0;
    //The pack ends with HapHeaderStart, which read backwards spells HapHeaderEnd.
    string header = "";
    size_t i;
    for(i = 0; i != HapHeaderEnd.length; i++)
        header~= fileData[$-1-i];

    if(header != HapHeaderEnd)
        return 0;
337     
338     ptrdiff_t z = 0;
339     i = fileData.length - i;
340     fileCapture: for(; i != 0; i--)
341     {
342         while(fileData[i-z] == HapHeaderStart[z])
343         {
344             z++;
345             if(z == HapHeaderStart.length)
346                 break fileCapture;
347         }
348         z = 0;
349     }
350     return i+1;
351 }
352 
353 HapChunk[] getHapChunks(in ubyte[] hapFile, size_t headerStart)
354 {
355     import hip.util.string : split;
356     import hip.util.conv : to;
357     import core.stdc.string : memcpy;
358     HapChunk[] ret;
359     string hap = "";
360     for(size_t i = headerStart; i < hapFile.length-HapHeaderStart.length; i++)
361         hap~= hapFile[i];
362     string[] infos = split(hap,  '\n');
363 
364     foreach(info; infos)
365     {
366         HapChunk h;
367         string[] temp = split(info, ", ");
        if(temp.length < 2)
            continue;
370         h.fileName = temp[0];
371         h.startPosition = to!size_t(temp[1]);
372         ret~= h;
373     }
374 
    //No entries were parsed; return before indexing ret[$-1] below.
    if(ret.length == 0)
        return ret;

    for(int i = 0; i < cast(int)ret.length-1; i++)
376     {
377         const size_t fileLength = ret[i+1].startPosition - ret[i].startPosition;
378         ret[i].bin = new ubyte[fileLength];
379         memcpy(ret[i].bin.ptr, hapFile.ptr+ret[i].startPosition, fileLength);
380     }
381 
382     //File length - headerLength
383     const size_t headerLength = (hapFile.length - headerStart);
384     ret[$-1].bin = new ubyte[hapFile.length - ret[$-1].startPosition - headerLength - HapHeaderEnd.length];
385     memcpy(ret[$-1].bin.ptr, hapFile.ptr+ret[$-1].startPosition, ret[$-1].bin.length);
386 
387     return ret;
388 
389 }
390 
391 HapChunk[] getHapChunks(string hapFilePath)
392 {
393     import std.stdio : File;
394     File f = File(hapFilePath);
395     ubyte[] hapFile = new ubyte[f.size];
396     f.rawRead(hapFile);
397     return getHapChunks(hapFile, getHeaderStart(hapFile));
398 }